In [ ]:
!nvidia-smi
Tue Sep 24 19:33:19 2024       
+---------------------------------------------------------------------------------------+
| NVIDIA-SMI 535.104.05             Driver Version: 535.104.05   CUDA Version: 12.2     |
|-----------------------------------------+----------------------+----------------------+
| GPU  Name                 Persistence-M | Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp   Perf          Pwr:Usage/Cap |         Memory-Usage | GPU-Util  Compute M. |
|                                         |                      |               MIG M. |
|=========================================+======================+======================|
|   0  Tesla T4                       Off | 00000000:00:04.0 Off |                    0 |
| N/A   76C    P0              35W /  70W |   1161MiB / 15360MiB |      0%      Default |
|                                         |                      |                  N/A |
+-----------------------------------------+----------------------+----------------------+
                                                                                         
+---------------------------------------------------------------------------------------+
| Processes:                                                                            |
|  GPU   GI   CI        PID   Type   Process name                            GPU Memory |
|        ID   ID                                                             Usage      |
|=======================================================================================|
+---------------------------------------------------------------------------------------+
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
In [ ]:
!unzip /content/drive/MyDrive/data/archive_copy.zip
In [4]:
import os
import cv2
import tensorflow as tf
import pandas as pd
import numpy as np
In [7]:
df = pd.read_csv('/content/archive copy/sports.csv')
df.head()
Out[7]:
   class id                 filepaths      labels data set
0         0  train/air hockey/001.jpg  air hockey    train
1         0  train/air hockey/002.jpg  air hockey    train
2         0  train/air hockey/003.jpg  air hockey    train
3         0  train/air hockey/004.jpg  air hockey    train
4         0  train/air hockey/005.jpg  air hockey    train
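A quick sanity check of the CSV (added here for illustration; it is not part of the original run): count rows per split via the `data set` column and the number of distinct sport labels.
print(df['data set'].value_counts())      # rows per train/valid/test split
print(df['labels'].nunique(), "classes")  # number of distinct sports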
In [8]:
# Data Augmentation
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from PIL import Image
import os


# NOTE: the original cell was missing its `def` line; the signature below is an
# assumption (a dataframe of image paths/labels plus a flag selecting the split).
def build_data_generator(dataframe, set_no):
    # ImageDataGenerator for loading images
    datagen = None
    if set_no == 0:  # training set
        datagen = ImageDataGenerator(
            rescale=1./255,              # Rescale pixel values from [0, 255] to [0, 1]
            rotation_range=10,           # Random rotations up to 10 degrees
            width_shift_range=0.1,       # Random horizontal shifts by 10%
            height_shift_range=0.1,      # Random vertical shifts by 10%
            shear_range=0.05,            # Shearing transformation
            zoom_range=0.075,            # Random zooms
            horizontal_flip=True,        # Random horizontal flips
            fill_mode='nearest'          # Fill mode for points outside boundaries
        )
    else:
        datagen = ImageDataGenerator(rescale=1.0/255.0)

    # Load images from the combined train and validation set
    data_gen = datagen.flow_from_dataframe(
        dataframe=dataframe,
        directory='/content/archive copy',
        x_col='filepaths',
        y_col='labels',
        target_size=(224, 224),
        batch_size=64,
        class_mode='categorical'
    )

    print("\n\n\n")
    return (dataframe, data_gen)
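One possible way to call the reconstructed helper above; the function name, the `valid` split value, and the filtering on the `data set` column are assumptions added for illustration.
# Hypothetical usage of the reconstructed helper (not executed in the original notebook)
train_df = df[df['data set'] == 'train']
valid_df = df[df['data set'] == 'valid']   # assumed split name, matching the 'valid' directory

_, train_aug_gen   = build_data_generator(train_df, set_no=0)  # augmented training generator
_, valid_plain_gen = build_data_generator(valid_df, set_no=1)  # rescale-only generator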
In [8]:
train_dir = '/content/archive copy/train'
test_dir = '/content/archive copy/test'
valid_dir = '/content/archive copy/valid'

# image generator to load in tf

train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(224, 224),
    batch_size=64,
    class_mode='categorical'
)

# Validation data generator
valid_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)
valid_generator = valid_datagen.flow_from_directory(
    valid_dir,
    target_size=(224, 224),
    batch_size=64,
    class_mode='categorical'
)

# Test data generator (shuffle=False keeps predictions aligned with test_generator.classes)
test_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)
test_generator = test_datagen.flow_from_directory(
    test_dir,
    target_size=(224, 224),
    batch_size=64,
    class_mode='categorical',
    shuffle=False
)
Found 13492 images belonging to 100 classes.
Found 500 images belonging to 100 classes.
Found 500 images belonging to 100 classes.
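A short added sketch to eyeball one batch from `train_generator` (assumes matplotlib is available in the Colab runtime): it pulls a single batch and shows a few images with labels recovered from their one-hot vectors.
import matplotlib.pyplot as plt

images, labels = next(train_generator)                    # one batch of rescaled images
class_names = list(train_generator.class_indices.keys())

plt.figure(figsize=(12, 6))
for i in range(8):
    plt.subplot(2, 4, i + 1)
    plt.imshow(images[i])                                 # pixel values already in [0, 1]
    plt.title(class_names[labels[i].argmax()])            # class name from the one-hot label
    plt.axis('off')
plt.tight_layout()
plt.show()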
In [10]:
from tensorflow.keras import layers, models
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, Input, BatchNormalization
from tensorflow.keras.optimizers import Adam, RMSprop
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau
from tensorflow.keras.applications.xception import Xception

# Create the Xception base model (ImageNet weights, without the top classifier)
input_shape = (224, 224, 3)
base_model_xception = Xception(weights='imagenet', include_top=False, input_shape=input_shape)

# Freeze the pre-trained base layers so only the new head is trained
for layer in base_model_xception.layers:
    layer.trainable = False

# Create a Sequential model
model_xception = models.Sequential()

# Adding the Xception base model
model_xception.add(base_model_xception)

# Adding Global Average Pooling 2D layer
model_xception.add(layers.GlobalAveragePooling2D())

# Adding a Dense layer with 1024 units and 'relu' activation
# model_xception.add(Dense(1024, activation='relu'))

# Adding Dropout layer with a dropout rate of 0.25
model_xception.add(layers.Dropout(0.25))

# Adding another Dense layer with 512 units and 'relu' activation
# model_xception.add(Dense(512, activation='relu'))

# Adding Dropout layer with a dropout rate of 0.3
# model_xception.add(Dropout(0.3))

# Adding the final Dense layer with 100 units and 'softmax' activation
model_xception.add(layers.Dense(100, activation='softmax'))
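Since the base is frozen, only the final Dense(100) head should be trainable (2048 * 100 + 100 = 204,900 parameters). A small added check of the split, using the standard Keras backend helper:
from tensorflow.keras import backend as K

trainable = sum(K.count_params(w) for w in model_xception.trainable_weights)
non_trainable = sum(K.count_params(w) for w in model_xception.non_trainable_weights)
print(f"Trainable params:     {trainable:,}")      # expected: 204,900 (the softmax head)
print(f"Non-trainable params: {non_trainable:,}")  # expected: the frozen ~20.9M Xception weights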
In [14]:
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np

# Function to plot bar charts for class distribution
def plot_class_distribution_bar(train_generator, val_generator, test_generator):
    plt.figure(figsize=(50, 10))

    # Get the class labels from the generators
    class_labels = list(train_generator.class_indices.keys())

    # Train set
    plt.subplot(1, 3, 1)
    train_class_counts = pd.Series(train_generator.classes).value_counts().sort_index()
    plt.bar(np.arange(len(class_labels)), train_class_counts.values, tick_label=class_labels, width=0.7)
    plt.xticks(rotation=45, fontsize=10)
    plt.yticks(fontsize=12)
    plt.title("Class Distribution - TRAIN", fontsize=16)
    plt.xlabel("Classes", fontsize=12)
    plt.ylabel("Count", fontsize=12)

    # Validation set
    plt.subplot(1, 3, 2)
    val_class_counts = pd.Series(val_generator.classes).value_counts().sort_index()
    plt.bar(np.arange(len(class_labels)), val_class_counts.values, tick_label=class_labels, width=0.7)
    plt.xticks(rotation=45, fontsize=10)
    plt.yticks(fontsize=12)
    plt.title("Class Distribution - VALIDATION", fontsize=16)
    plt.xlabel("Classes", fontsize=12)
    plt.ylabel("Count", fontsize=12)

    # Test set
    plt.subplot(1, 3, 3)
    test_class_counts = pd.Series(test_generator.classes).value_counts().sort_index()
    plt.bar(np.arange(len(class_labels)), test_class_counts.values, tick_label=class_labels, width=0.7)
    plt.xticks(rotation=45, fontsize=10)
    plt.yticks(fontsize=12)
    plt.title("Class Distribution - TEST", fontsize=16)
    plt.xlabel("Classes", fontsize=12)
    plt.ylabel("Count", fontsize=12)

    plt.tight_layout()
    plt.show()

# Function call
plot_class_distribution_bar(train_generator, valid_generator, test_generator)
[Image output: three bar charts showing the class distribution of the TRAIN, VALIDATION, and TEST sets]
In [13]:
#!pip install pydot
#!pip install graphviz
#base model graph of xception
from tensorflow.keras.utils import plot_model

# Plot and save the model as an image file
plot_model(base_model_xception, to_file='model_xception.png', show_shapes=True, show_layer_names=True, dpi=96)
Out[13]:
[Image output: layer diagram of the Xception base model, saved as model_xception.png]
In [12]:
base_model_xception.summary()
Model: "xception"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Layer (type)              ┃ Output Shape           ┃        Param # ┃ Connected to           ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━┩
│ input_layer_1             │ (None, 224, 224, 3)    │              0 │ -                      │
│ (InputLayer)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block1_conv1 (Conv2D)     │ (None, 111, 111, 32)   │            864 │ input_layer_1[0][0]    │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block1_conv1_bn           │ (None, 111, 111, 32)   │            128 │ block1_conv1[0][0]     │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block1_conv1_act          │ (None, 111, 111, 32)   │              0 │ block1_conv1_bn[0][0]  │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block1_conv2 (Conv2D)     │ (None, 109, 109, 64)   │         18,432 │ block1_conv1_act[0][0] │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block1_conv2_bn           │ (None, 109, 109, 64)   │            256 │ block1_conv2[0][0]     │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block1_conv2_act          │ (None, 109, 109, 64)   │              0 │ block1_conv2_bn[0][0]  │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block2_sepconv1           │ (None, 109, 109, 128)  │          8,768 │ block1_conv2_act[0][0] │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block2_sepconv1_bn        │ (None, 109, 109, 128)  │            512 │ block2_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block2_sepconv2_act       │ (None, 109, 109, 128)  │              0 │ block2_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block2_sepconv2           │ (None, 109, 109, 128)  │         17,536 │ block2_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block2_sepconv2_bn        │ (None, 109, 109, 128)  │            512 │ block2_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ conv2d_4 (Conv2D)         │ (None, 55, 55, 128)    │          8,192 │ block1_conv2_act[0][0] │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block2_pool               │ (None, 55, 55, 128)    │              0 │ block2_sepconv2_bn[0]… │
│ (MaxPooling2D)            │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ batch_normalization_4     │ (None, 55, 55, 128)    │            512 │ conv2d_4[0][0]         │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_12 (Add)              │ (None, 55, 55, 128)    │              0 │ block2_pool[0][0],     │
│                           │                        │                │ batch_normalization_4… │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_sepconv1_act       │ (None, 55, 55, 128)    │              0 │ add_12[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_sepconv1           │ (None, 55, 55, 256)    │         33,920 │ block3_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_sepconv1_bn        │ (None, 55, 55, 256)    │          1,024 │ block3_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_sepconv2_act       │ (None, 55, 55, 256)    │              0 │ block3_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_sepconv2           │ (None, 55, 55, 256)    │         67,840 │ block3_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_sepconv2_bn        │ (None, 55, 55, 256)    │          1,024 │ block3_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ conv2d_5 (Conv2D)         │ (None, 28, 28, 256)    │         32,768 │ add_12[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block3_pool               │ (None, 28, 28, 256)    │              0 │ block3_sepconv2_bn[0]… │
│ (MaxPooling2D)            │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ batch_normalization_5     │ (None, 28, 28, 256)    │          1,024 │ conv2d_5[0][0]         │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_13 (Add)              │ (None, 28, 28, 256)    │              0 │ block3_pool[0][0],     │
│                           │                        │                │ batch_normalization_5… │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_sepconv1_act       │ (None, 28, 28, 256)    │              0 │ add_13[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_sepconv1           │ (None, 28, 28, 728)    │        188,672 │ block4_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_sepconv1_bn        │ (None, 28, 28, 728)    │          2,912 │ block4_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_sepconv2_act       │ (None, 28, 28, 728)    │              0 │ block4_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_sepconv2           │ (None, 28, 28, 728)    │        536,536 │ block4_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_sepconv2_bn        │ (None, 28, 28, 728)    │          2,912 │ block4_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ conv2d_6 (Conv2D)         │ (None, 14, 14, 728)    │        186,368 │ add_13[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block4_pool               │ (None, 14, 14, 728)    │              0 │ block4_sepconv2_bn[0]… │
│ (MaxPooling2D)            │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ batch_normalization_6     │ (None, 14, 14, 728)    │          2,912 │ conv2d_6[0][0]         │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_14 (Add)              │ (None, 14, 14, 728)    │              0 │ block4_pool[0][0],     │
│                           │                        │                │ batch_normalization_6… │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv1_act       │ (None, 14, 14, 728)    │              0 │ add_14[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv1           │ (None, 14, 14, 728)    │        536,536 │ block5_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv1_bn        │ (None, 14, 14, 728)    │          2,912 │ block5_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv2_act       │ (None, 14, 14, 728)    │              0 │ block5_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv2           │ (None, 14, 14, 728)    │        536,536 │ block5_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv2_bn        │ (None, 14, 14, 728)    │          2,912 │ block5_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv3_act       │ (None, 14, 14, 728)    │              0 │ block5_sepconv2_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv3           │ (None, 14, 14, 728)    │        536,536 │ block5_sepconv3_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block5_sepconv3_bn        │ (None, 14, 14, 728)    │          2,912 │ block5_sepconv3[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_15 (Add)              │ (None, 14, 14, 728)    │              0 │ block5_sepconv3_bn[0]… │
│                           │                        │                │ add_14[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv1_act       │ (None, 14, 14, 728)    │              0 │ add_15[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv1           │ (None, 14, 14, 728)    │        536,536 │ block6_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv1_bn        │ (None, 14, 14, 728)    │          2,912 │ block6_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv2_act       │ (None, 14, 14, 728)    │              0 │ block6_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv2           │ (None, 14, 14, 728)    │        536,536 │ block6_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv2_bn        │ (None, 14, 14, 728)    │          2,912 │ block6_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv3_act       │ (None, 14, 14, 728)    │              0 │ block6_sepconv2_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv3           │ (None, 14, 14, 728)    │        536,536 │ block6_sepconv3_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block6_sepconv3_bn        │ (None, 14, 14, 728)    │          2,912 │ block6_sepconv3[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_16 (Add)              │ (None, 14, 14, 728)    │              0 │ block6_sepconv3_bn[0]… │
│                           │                        │                │ add_15[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv1_act       │ (None, 14, 14, 728)    │              0 │ add_16[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv1           │ (None, 14, 14, 728)    │        536,536 │ block7_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv1_bn        │ (None, 14, 14, 728)    │          2,912 │ block7_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv2_act       │ (None, 14, 14, 728)    │              0 │ block7_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv2           │ (None, 14, 14, 728)    │        536,536 │ block7_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv2_bn        │ (None, 14, 14, 728)    │          2,912 │ block7_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv3_act       │ (None, 14, 14, 728)    │              0 │ block7_sepconv2_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv3           │ (None, 14, 14, 728)    │        536,536 │ block7_sepconv3_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block7_sepconv3_bn        │ (None, 14, 14, 728)    │          2,912 │ block7_sepconv3[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_17 (Add)              │ (None, 14, 14, 728)    │              0 │ block7_sepconv3_bn[0]… │
│                           │                        │                │ add_16[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv1_act       │ (None, 14, 14, 728)    │              0 │ add_17[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv1           │ (None, 14, 14, 728)    │        536,536 │ block8_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv1_bn        │ (None, 14, 14, 728)    │          2,912 │ block8_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv2_act       │ (None, 14, 14, 728)    │              0 │ block8_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv2           │ (None, 14, 14, 728)    │        536,536 │ block8_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv2_bn        │ (None, 14, 14, 728)    │          2,912 │ block8_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv3_act       │ (None, 14, 14, 728)    │              0 │ block8_sepconv2_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv3           │ (None, 14, 14, 728)    │        536,536 │ block8_sepconv3_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block8_sepconv3_bn        │ (None, 14, 14, 728)    │          2,912 │ block8_sepconv3[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_18 (Add)              │ (None, 14, 14, 728)    │              0 │ block8_sepconv3_bn[0]… │
│                           │                        │                │ add_17[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv1_act       │ (None, 14, 14, 728)    │              0 │ add_18[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv1           │ (None, 14, 14, 728)    │        536,536 │ block9_sepconv1_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv1_bn        │ (None, 14, 14, 728)    │          2,912 │ block9_sepconv1[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv2_act       │ (None, 14, 14, 728)    │              0 │ block9_sepconv1_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv2           │ (None, 14, 14, 728)    │        536,536 │ block9_sepconv2_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv2_bn        │ (None, 14, 14, 728)    │          2,912 │ block9_sepconv2[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv3_act       │ (None, 14, 14, 728)    │              0 │ block9_sepconv2_bn[0]… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv3           │ (None, 14, 14, 728)    │        536,536 │ block9_sepconv3_act[0… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block9_sepconv3_bn        │ (None, 14, 14, 728)    │          2,912 │ block9_sepconv3[0][0]  │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_19 (Add)              │ (None, 14, 14, 728)    │              0 │ block9_sepconv3_bn[0]… │
│                           │                        │                │ add_18[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv1_act      │ (None, 14, 14, 728)    │              0 │ add_19[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv1          │ (None, 14, 14, 728)    │        536,536 │ block10_sepconv1_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv1_bn       │ (None, 14, 14, 728)    │          2,912 │ block10_sepconv1[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv2_act      │ (None, 14, 14, 728)    │              0 │ block10_sepconv1_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv2          │ (None, 14, 14, 728)    │        536,536 │ block10_sepconv2_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv2_bn       │ (None, 14, 14, 728)    │          2,912 │ block10_sepconv2[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv3_act      │ (None, 14, 14, 728)    │              0 │ block10_sepconv2_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv3          │ (None, 14, 14, 728)    │        536,536 │ block10_sepconv3_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block10_sepconv3_bn       │ (None, 14, 14, 728)    │          2,912 │ block10_sepconv3[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_20 (Add)              │ (None, 14, 14, 728)    │              0 │ block10_sepconv3_bn[0… │
│                           │                        │                │ add_19[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv1_act      │ (None, 14, 14, 728)    │              0 │ add_20[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv1          │ (None, 14, 14, 728)    │        536,536 │ block11_sepconv1_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv1_bn       │ (None, 14, 14, 728)    │          2,912 │ block11_sepconv1[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv2_act      │ (None, 14, 14, 728)    │              0 │ block11_sepconv1_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv2          │ (None, 14, 14, 728)    │        536,536 │ block11_sepconv2_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv2_bn       │ (None, 14, 14, 728)    │          2,912 │ block11_sepconv2[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv3_act      │ (None, 14, 14, 728)    │              0 │ block11_sepconv2_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv3          │ (None, 14, 14, 728)    │        536,536 │ block11_sepconv3_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block11_sepconv3_bn       │ (None, 14, 14, 728)    │          2,912 │ block11_sepconv3[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_21 (Add)              │ (None, 14, 14, 728)    │              0 │ block11_sepconv3_bn[0… │
│                           │                        │                │ add_20[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv1_act      │ (None, 14, 14, 728)    │              0 │ add_21[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv1          │ (None, 14, 14, 728)    │        536,536 │ block12_sepconv1_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv1_bn       │ (None, 14, 14, 728)    │          2,912 │ block12_sepconv1[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv2_act      │ (None, 14, 14, 728)    │              0 │ block12_sepconv1_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv2          │ (None, 14, 14, 728)    │        536,536 │ block12_sepconv2_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv2_bn       │ (None, 14, 14, 728)    │          2,912 │ block12_sepconv2[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv3_act      │ (None, 14, 14, 728)    │              0 │ block12_sepconv2_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv3          │ (None, 14, 14, 728)    │        536,536 │ block12_sepconv3_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block12_sepconv3_bn       │ (None, 14, 14, 728)    │          2,912 │ block12_sepconv3[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_22 (Add)              │ (None, 14, 14, 728)    │              0 │ block12_sepconv3_bn[0… │
│                           │                        │                │ add_21[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_sepconv1_act      │ (None, 14, 14, 728)    │              0 │ add_22[0][0]           │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_sepconv1          │ (None, 14, 14, 728)    │        536,536 │ block13_sepconv1_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_sepconv1_bn       │ (None, 14, 14, 728)    │          2,912 │ block13_sepconv1[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_sepconv2_act      │ (None, 14, 14, 728)    │              0 │ block13_sepconv1_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_sepconv2          │ (None, 14, 14, 1024)   │        752,024 │ block13_sepconv2_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_sepconv2_bn       │ (None, 14, 14, 1024)   │          4,096 │ block13_sepconv2[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ conv2d_7 (Conv2D)         │ (None, 7, 7, 1024)     │        745,472 │ add_22[0][0]           │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block13_pool              │ (None, 7, 7, 1024)     │              0 │ block13_sepconv2_bn[0… │
│ (MaxPooling2D)            │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ batch_normalization_7     │ (None, 7, 7, 1024)     │          4,096 │ conv2d_7[0][0]         │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ add_23 (Add)              │ (None, 7, 7, 1024)     │              0 │ block13_pool[0][0],    │
│                           │                        │                │ batch_normalization_7… │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block14_sepconv1          │ (None, 7, 7, 1536)     │      1,582,080 │ add_23[0][0]           │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block14_sepconv1_bn       │ (None, 7, 7, 1536)     │          6,144 │ block14_sepconv1[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block14_sepconv1_act      │ (None, 7, 7, 1536)     │              0 │ block14_sepconv1_bn[0… │
│ (Activation)              │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block14_sepconv2          │ (None, 7, 7, 2048)     │      3,159,552 │ block14_sepconv1_act[… │
│ (SeparableConv2D)         │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block14_sepconv2_bn       │ (None, 7, 7, 2048)     │          8,192 │ block14_sepconv2[0][0] │
│ (BatchNormalization)      │                        │                │                        │
├───────────────────────────┼────────────────────────┼────────────────┼────────────────────────┤
│ block14_sepconv2_act      │ (None, 7, 7, 2048)     │              0 │ block14_sepconv2_bn[0… │
│ (Activation)              │                        │                │                        │
└───────────────────────────┴────────────────────────┴────────────────┴────────────────────────┘
 Total params: 20,861,480 (79.58 MB)
 Trainable params: 0 (0.00 B)
 Non-trainable params: 20,861,480 (79.58 MB)
In [14]:
# Learning-rate scheduler: reduce the LR when validation loss plateaus
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, min_lr=1e-7)

model_xception.compile(optimizer='adam',
                       loss='categorical_crossentropy',
                       metrics=['accuracy', 'Precision', 'Recall'])
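For reference, the metric strings above resolve to Keras metric classes, and 'adam' uses its default learning rate of 1e-3 (matching the `learning_rate: 0.0010` in the training log). An equivalent, more explicit compile call (added for illustration) might look like this:
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.metrics import Precision, Recall

model_xception.compile(
    optimizer=Adam(learning_rate=1e-3),   # same as the 'adam' default
    loss='categorical_crossentropy',
    metrics=['accuracy', Precision(name='Precision'), Recall(name='Recall')]
)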
In [ ]:
from keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
history2=model_xception.fit(
        train_generator,
        epochs=20,
        validation_data=valid_generator,
        callbacks=[early_stopping, reduce_lr])
Epoch 1/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 352s 1s/step - Precision: 0.8315 - Recall: 0.3810 - accuracy: 0.5467 - loss: 1.9570 - val_Precision: 0.7682 - val_Recall: 0.6760 - val_accuracy: 0.7120 - val_loss: 1.0929 - learning_rate: 0.0010
Epoch 2/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 262s 988ms/step - Precision: 0.9464 - Recall: 0.8887 - accuracy: 0.9173 - loss: 0.2795 - val_Precision: 0.8447 - val_Recall: 0.7180 - val_accuracy: 0.7720 - val_loss: 0.7871 - learning_rate: 0.0010
Epoch 3/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 210s 988ms/step - Precision: 0.9656 - Recall: 0.9340 - accuracy: 0.9504 - loss: 0.1713 - val_Precision: 0.8412 - val_Recall: 0.7100 - val_accuracy: 0.7620 - val_loss: 0.8412 - learning_rate: 0.0010
Epoch 4/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 209s 987ms/step - Precision: 0.9681 - Recall: 0.9468 - accuracy: 0.9577 - loss: 0.1390 - val_Precision: 0.8872 - val_Recall: 0.8180 - val_accuracy: 0.8560 - val_loss: 0.5278 - learning_rate: 0.0010
Epoch 5/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 210s 989ms/step - Precision: 0.9759 - Recall: 0.9611 - accuracy: 0.9687 - loss: 0.1057 - val_Precision: 0.8597 - val_Recall: 0.7720 - val_accuracy: 0.7960 - val_loss: 0.7402 - learning_rate: 0.0010
Epoch 6/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 262s 989ms/step - Precision: 0.9713 - Recall: 0.9561 - accuracy: 0.9630 - loss: 0.1167 - val_Precision: 0.8882 - val_Recall: 0.8260 - val_accuracy: 0.8560 - val_loss: 0.4965 - learning_rate: 0.0010
Epoch 7/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 262s 990ms/step - Precision: 0.9714 - Recall: 0.9573 - accuracy: 0.9656 - loss: 0.1161 - val_Precision: 0.9296 - val_Recall: 0.8720 - val_accuracy: 0.9040 - val_loss: 0.3446 - learning_rate: 0.0010
Epoch 8/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 261s 986ms/step - Precision: 0.9862 - Recall: 0.9800 - accuracy: 0.9826 - loss: 0.0582 - val_Precision: 0.9083 - val_Recall: 0.8720 - val_accuracy: 0.8860 - val_loss: 0.3651 - learning_rate: 0.0010
Epoch 9/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 209s 986ms/step - Precision: 0.9802 - Recall: 0.9723 - accuracy: 0.9760 - loss: 0.0745 - val_Precision: 0.9113 - val_Recall: 0.8840 - val_accuracy: 0.8960 - val_loss: 0.4146 - learning_rate: 0.0010
Epoch 10/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 209s 984ms/step - Precision: 0.9834 - Recall: 0.9745 - accuracy: 0.9782 - loss: 0.0667 - val_Precision: 0.8794 - val_Recall: 0.8460 - val_accuracy: 0.8480 - val_loss: 0.5503 - learning_rate: 0.0010
Epoch 11/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 262s 987ms/step - Precision: 0.9898 - Recall: 0.9866 - accuracy: 0.9879 - loss: 0.0475 - val_Precision: 0.9534 - val_Recall: 0.9420 - val_accuracy: 0.9440 - val_loss: 0.1943 - learning_rate: 1.0000e-04
Epoch 12/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 209s 989ms/step - Precision: 0.9997 - Recall: 0.9996 - accuracy: 0.9997 - loss: 0.0052 - val_Precision: 0.9515 - val_Recall: 0.9420 - val_accuracy: 0.9460 - val_loss: 0.1852 - learning_rate: 1.0000e-04
Epoch 13/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 209s 987ms/step - Precision: 0.9999 - Recall: 0.9999 - accuracy: 0.9999 - loss: 0.0031 - val_Precision: 0.9497 - val_Recall: 0.9440 - val_accuracy: 0.9440 - val_loss: 0.1757 - learning_rate: 1.0000e-04
Epoch 14/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 209s 986ms/step - Precision: 0.9998 - Recall: 0.9998 - accuracy: 0.9998 - loss: 0.0021 - val_Precision: 0.9555 - val_Recall: 0.9440 - val_accuracy: 0.9480 - val_loss: 0.1691 - learning_rate: 1.0000e-04
Epoch 15/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 262s 989ms/step - Precision: 1.0000 - Recall: 1.0000 - accuracy: 1.0000 - loss: 0.0018 - val_Precision: 0.9556 - val_Recall: 0.9480 - val_accuracy: 0.9520 - val_loss: 0.1676 - learning_rate: 1.0000e-04
Epoch 16/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 210s 990ms/step - Precision: 0.9996 - Recall: 0.9994 - accuracy: 0.9996 - loss: 0.0018 - val_Precision: 0.9537 - val_Recall: 0.9480 - val_accuracy: 0.9520 - val_loss: 0.1667 - learning_rate: 1.0000e-04
Epoch 17/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 261s 987ms/step - Precision: 1.0000 - Recall: 1.0000 - accuracy: 1.0000 - loss: 0.0014 - val_Precision: 0.9556 - val_Recall: 0.9480 - val_accuracy: 0.9520 - val_loss: 0.1697 - learning_rate: 1.0000e-04
Epoch 18/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 262s 990ms/step - Precision: 1.0000 - Recall: 1.0000 - accuracy: 1.0000 - loss: 0.0012 - val_Precision: 0.9517 - val_Recall: 0.9460 - val_accuracy: 0.9500 - val_loss: 0.1668 - learning_rate: 1.0000e-04
Epoch 19/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 261s 987ms/step - Precision: 1.0000 - Recall: 1.0000 - accuracy: 1.0000 - loss: 7.2876e-04 - val_Precision: 0.9536 - val_Recall: 0.9460 - val_accuracy: 0.9500 - val_loss: 0.1663 - learning_rate: 1.0000e-04
Epoch 20/20
211/211 ━━━━━━━━━━━━━━━━━━━━ 263s 988ms/step - Precision: 1.0000 - Recall: 1.0000 - accuracy: 1.0000 - loss: 6.2921e-04 - val_Precision: 0.9556 - val_Recall: 0.9480 - val_accuracy: 0.9520 - val_loss: 0.1680 - learning_rate: 1.0000e-04
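`history2.history` holds the per-epoch metrics printed above; a short added sketch to plot the accuracy and loss curves:
import matplotlib.pyplot as plt

hist = history2.history

plt.figure(figsize=(12, 4))

plt.subplot(1, 2, 1)
plt.plot(hist['accuracy'], label='train accuracy')
plt.plot(hist['val_accuracy'], label='val accuracy')
plt.xlabel('Epoch'); plt.ylabel('Accuracy'); plt.legend()

plt.subplot(1, 2, 2)
plt.plot(hist['loss'], label='train loss')
plt.plot(hist['val_loss'], label='val loss')
plt.xlabel('Epoch'); plt.ylabel('Loss'); plt.legend()

plt.tight_layout()
plt.show()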
In [15]:
probabilities = model_xception.predict(test_generator)

# Print the probabilities for each image
print(probabilities)
/usr/local/lib/python3.10/dist-packages/keras/src/trainers/data_adapters/py_dataset_adapter.py:121: UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`. Do not pass these arguments to `fit()`, as they will be ignored.
  self._warn_if_super_not_called()
8/8 ━━━━━━━━━━━━━━━━━━━━ 132s 16s/step
[[0.01260355 0.00864721 0.00669275 ... 0.00959175 0.00866077 0.01121298]
 [0.00642109 0.00831076 0.00523311 ... 0.0040834  0.00401792 0.0130676 ]
 [0.00972067 0.0071232  0.00435577 ... 0.00546306 0.00871662 0.01660202]
 ...
 [0.01169497 0.0158292  0.00530293 ... 0.01637501 0.008435   0.00985603]
 [0.01067258 0.0071513  0.00501839 ... 0.01530412 0.00756587 0.00906572]
 [0.00580277 0.0135356  0.00556272 ... 0.01069322 0.00497533 0.0235156 ]]
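The probabilities can be reduced to hard predictions and an overall test accuracy; this added sketch assumes the test generator was built with shuffle=False (as above), so predictions line up with `test_generator.classes`.
predicted_classes = probabilities.argmax(axis=1)   # most likely class index per image
true_classes = test_generator.classes
test_accuracy = (predicted_classes == true_classes).mean()
print(f"Test accuracy: {test_accuracy:.4f}")

# Or let Keras compute loss and the compiled metrics directly
results = model_xception.evaluate(test_generator)
print(dict(zip(model_xception.metrics_names, results)))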
In [20]:
probabilities = model_xception.predict(test_generator)

true_classes = test_generator.classes

class_labels = list(test_generator.class_indices.keys())

df = pd.DataFrame(probabilities, columns=class_labels)
df['True_Label'] = [class_labels[i] for i in true_classes]

print(df.head(30))

df.to_csv("test_predictions_with_labels.csv", index=False)
8/8 ━━━━━━━━━━━━━━━━━━━━ 128s 16s/step
[Prediction DataFrame, 30 rows x 101 columns: one softmax-probability column per sports class (air hockey, ampute football, archery, arm wrestling, axe throwing, balance beam, ..., wingsuit flying) plus a True_Label column holding each test image's ground-truth class; the first 30 test rows cover five examples each of the first six classes.]
In [ ]:
import matplotlib.pyplot as plt

def plot_results(history2, title):
    epochs = range(1, len(history2.history['accuracy']) + 1)


    fig, axs = plt.subplots(2, 1, figsize=(10, 10))

    axs[0].plot(epochs, history2.history['loss'], label='Training Loss')
    axs[0].plot(epochs, history2.history['val_loss'], label='Validation Loss')
    axs[0].set_title('Loss vs. Epochs')
    axs[0].set_xlabel('Epochs')
    axs[0].set_ylabel('Loss')
    axs[0].legend()

    axs[1].plot(epochs, history2.history['accuracy'], label='Training Accuracy')
    axs[1].plot(epochs, history2.history['val_accuracy'], label='Validation Accuracy')
    axs[1].set_title('Accuracy vs. Epochs')
    axs[1].set_xlabel('Epochs')
    axs[1].set_ylabel('Accuracy')
    axs[1].legend()

    plt.suptitle(title)
    plt.tight_layout()
    plt.show()

# Plot loss and accuracy curves for the fine-tuned Xception model
plot_results(history2, 'Training and Validation Results')
[Image: training/validation loss and accuracy curves for the fine-tuned Xception model]
In [ ]:
train_loss = history2.history['loss']
train_accuracy = history2.history['accuracy']
val_loss = history2.history['val_loss']
val_accuracy = history2.history['val_accuracy']

print(f"Final Training Loss: {train_loss[-1]}")
print(f"Final Training Accuracy: {train_accuracy[-1]}")
print(f"Final Validation Loss: {val_loss[-1]}")
print(f"Final Validation Accuracy: {val_accuracy[-1]}")
Final Training Loss: 0.0006319913663901389
Final Training Accuracy: 1.0
Final Validation Loss: 0.1679784059524536
Final Validation Accuracy: 0.9520000219345093
In [ ]:
# Evaluate the fine-tuned Xception model on the test set in a single pass;
# evaluate() returns [loss, metric1, metric2, ...] in the compiled metric order
test_results = model_xception.evaluate(test_generator)
test_loss, test_acc = test_results[0], test_results[1]
print(f"Test Loss: {test_loss}")
print(f"Test Accuracy: {test_acc}")
8/8 ━━━━━━━━━━━━━━━━━━━━ 3s 285ms/step - Precision: 0.9809 - Recall: 0.9719 - accuracy: 0.9737 - loss: 0.0757
8/8 ━━━━━━━━━━━━━━━━━━━━ 2s 275ms/step - Precision: 0.9836 - Recall: 0.9700 - accuracy: 0.9814 - loss: 0.0684
Test Loss: 0.07798594981431961
Test Accuracy: 0.9760000109672546
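An alternative that avoids relying on index positions altogether is Keras's return_dict option, which returns the metrics keyed by name (a minimal sketch; the 'accuracy' key assumes the model was compiled with an accuracy metric):

# Fetch test metrics keyed by name rather than by list position
results = model_xception.evaluate(test_generator, return_dict=True)
print(f"Test Loss: {results['loss']}")
print(f"Test Accuracy: {results['accuracy']}")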
In [21]:
from sklearn.metrics import classification_report, confusion_matrix
import seaborn as sns
import matplotlib.pyplot as plt

# Predict the labels for the test data.
# Note: this assumes test_generator was built with shuffle=False, so that the
# prediction order lines up with test_generator.classes.
y_pred = np.argmax(model_xception.predict(test_generator), axis=-1)
y_true = test_generator.classes

# Row-normalised confusion matrix (each row sums to 1)
cm = confusion_matrix(y_true, y_pred)
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
plt.figure(figsize=(20, 20))
sns.heatmap(cm, annot=False, cmap="Blues")
plt.title("Confusion Matrix")
plt.xlabel("Predicted label")
plt.ylabel("True label")
plt.show()
8/8 ━━━━━━━━━━━━━━━━━━━━ 129s 16s/step
[Image: 100x100 row-normalised confusion matrix heatmap for the Xception model]
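Since classification_report is already imported in the cell above, the same y_true and y_pred arrays can also produce a per-class precision/recall/F1 summary (a minimal sketch; it assumes the generator's class_indices ordering matches the label indices, which is the Keras default):

# Per-class precision, recall and F1 for the Xception predictions
class_names = list(test_generator.class_indices.keys())
print(classification_report(y_true, y_pred, target_names=class_names))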
In [ ]:
import numpy as np
import matplotlib.pyplot as plt
from skimage import img_as_float, io

# Get true labels and predicted labels
y_true = test_generator.classes  # True labels from the generator
y_pred = model_xception.predict(test_generator)  # Predictions from the model
y_pred_classes = np.argmax(y_pred, axis=1)  # Get predicted class indices

# Class names (assuming test_generator has a class_indices attribute)
class_names = list(test_generator.class_indices.keys())

# Filepaths of test images from the generator
filepaths = test_generator.filepaths  # Get the filepaths from the generator

# Display correct predictions
correct_indices = np.where(y_pred_classes == y_true)[0]  # Indices where predictions match true labels
for idx in correct_indices[:5]:  # Display the first 5 correct predictions
    img = img_as_float(io.imread(filepaths[idx]))  # Load the image
    plt.imshow(img)
    plt.title(f"Predicted: {class_names[y_pred_classes[idx]]}, Actual: {class_names[y_true[idx]]}")
    plt.axis('off')  # Hide axis for better visualization
    plt.show()
8/8 ━━━━━━━━━━━━━━━━━━━━ 141s 17s/step
[Images: the first 5 correctly classified test examples, each shown with its predicted and actual label]
In [ ]:
import numpy as np
import matplotlib.pyplot as plt
from skimage import img_as_float, io

# Get true labels and predicted labels
y_true = test_generator.classes  # True labels from the generator
y_pred = model_xception.predict(test_generator)  # Predictions from the model
y_pred_classes = np.argmax(y_pred, axis=1)  # Get predicted class indices

# Class names (assuming test_generator has a class_indices attribute)
class_names = list(test_generator.class_indices.keys())

# Filepaths of test images from the generator
filepaths = test_generator.filepaths  # Get the filepaths from the generator

# Display incorrect predictions
incorrect_indices = np.where(y_pred_classes != y_true)[0]  # Indices where predictions do not match true labels
for idx in incorrect_indices[:5]:  # Display the first 5 incorrect predictions
    img = img_as_float(io.imread(filepaths[idx]))  # Load the image
    plt.imshow(img)
    plt.title(f"Predicted: {class_names[y_pred_classes[idx]]}, Actual: {class_names[y_true[idx]]}")
    plt.axis('off')  # Hide axis for better visualization
    plt.show()
8/8 ━━━━━━━━━━━━━━━━━━━━ 136s 17s/step
[Images: the first 5 misclassified test examples, each shown with its predicted and actual label]
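Rather than opening one figure per example, the same misclassifications can be laid out in a single row of subplots (a minimal sketch that reuses incorrect_indices, filepaths, class_names, y_pred_classes and y_true from the cell above):

# Show the first 5 misclassified test images side by side
fig, axes = plt.subplots(1, 5, figsize=(20, 4))
for ax, idx in zip(axes, incorrect_indices[:5]):
    ax.imshow(img_as_float(io.imread(filepaths[idx])))
    ax.set_title(f"Pred: {class_names[y_pred_classes[idx]]}\nTrue: {class_names[y_true[idx]]}", fontsize=9)
    ax.axis('off')
plt.tight_layout()
plt.show()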
In [ ]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization, GlobalAveragePooling2D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping

# Build the CNN model
model = Sequential()

# Block 1
model.add(Input(shape=(224, 224, 3)))  # Explicit Input layer, as recommended for Sequential models
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.3))

# Block 2
model.add(Conv2D(filters=128, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(filters=128, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.4))

# Block 3
model.add(Conv2D(filters=256, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(filters=256, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.4))

# Block 4
model.add(Conv2D(filters=512, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(filters=512, kernel_size=(3, 3), activation='relu'))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.5))

# Global Average Pooling and fully connected layers
model.add(GlobalAveragePooling2D())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(100, activation='softmax'))

# Compile the model
model.compile(optimizer=Adam(learning_rate=1e-4),
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Model summary
model.summary()

# Callbacks for learning rate reduction and early stopping
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, min_lr=1e-7)
early_stopping = EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
Model: "sequential_1"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓
┃ Layer (type)                         ┃ Output Shape                ┃         Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩
│ conv2d_6 (Conv2D)                    │ (None, 222, 222, 64)        │           1,792 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_6                │ (None, 222, 222, 64)        │             256 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_7 (Conv2D)                    │ (None, 220, 220, 64)        │          36,928 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_7                │ (None, 220, 220, 64)        │             256 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_3 (MaxPooling2D)       │ (None, 110, 110, 64)        │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_4 (Dropout)                  │ (None, 110, 110, 64)        │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_8 (Conv2D)                    │ (None, 108, 108, 128)       │          73,856 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_8                │ (None, 108, 108, 128)       │             512 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_9 (Conv2D)                    │ (None, 106, 106, 128)       │         147,584 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_9                │ (None, 106, 106, 128)       │             512 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_4 (MaxPooling2D)       │ (None, 53, 53, 128)         │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_5 (Dropout)                  │ (None, 53, 53, 128)         │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_10 (Conv2D)                   │ (None, 51, 51, 256)         │         295,168 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_10               │ (None, 51, 51, 256)         │           1,024 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_11 (Conv2D)                   │ (None, 49, 49, 256)         │         590,080 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_11               │ (None, 49, 49, 256)         │           1,024 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_5 (MaxPooling2D)       │ (None, 24, 24, 256)         │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_6 (Dropout)                  │ (None, 24, 24, 256)         │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_12 (Conv2D)                   │ (None, 22, 22, 512)         │       1,180,160 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_12               │ (None, 22, 22, 512)         │           2,048 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_13 (Conv2D)                   │ (None, 20, 20, 512)         │       2,359,808 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ batch_normalization_13               │ (None, 20, 20, 512)         │           2,048 │
│ (BatchNormalization)                 │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_6 (MaxPooling2D)       │ (None, 10, 10, 512)         │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_7 (Dropout)                  │ (None, 10, 10, 512)         │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ global_average_pooling2d             │ (None, 512)                 │               0 │
│ (GlobalAveragePooling2D)             │                             │                 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_2 (Dense)                      │ (None, 256)                 │         131,328 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dropout_8 (Dropout)                  │ (None, 256)                 │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_3 (Dense)                      │ (None, 100)                 │          25,700 │
└──────────────────────────────────────┴─────────────────────────────┴─────────────────┘
 Total params: 4,850,084 (18.50 MB)
 Trainable params: 4,846,244 (18.49 MB)
 Non-trainable params: 3,840 (15.00 KB)
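The shapes in the summary follow from 3x3 'valid' convolutions (each reduces the side length by 2, e.g. 224 -> 222 -> 220) and 2x2 max pooling with floor division (220 -> 110), ending in a 10x10x512 feature map before global average pooling. The parameter counts can also be checked by hand; a small verification sketch for the first block (the expected values are the ones printed in the summary above):

# Hand-computed parameter counts for Block 1 (compare with the summary)
conv2d_6_params = 3 * 3 * 3 * 64 + 64    # 1,792: 3x3 kernels over 3 input channels, 64 filters + biases
conv2d_7_params = 3 * 3 * 64 * 64 + 64   # 36,928
bn_params = 4 * 64                       # 256: gamma, beta, moving mean, moving variance per channel
print(conv2d_6_params, conv2d_7_params, bn_params)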
In [ ]:
history = model.fit(
    train_generator,
    validation_data=valid_generator,
    epochs=100,
    callbacks=[reduce_lr, early_stopping]
)
Epoch 1/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 224s 782ms/step - accuracy: 0.0421 - loss: 4.6002 - val_accuracy: 0.0100 - val_loss: 5.8009 - learning_rate: 1.0000e-04
Epoch 2/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.1045 - loss: 3.9259 - val_accuracy: 0.0220 - val_loss: 5.2539 - learning_rate: 1.0000e-04
Epoch 3/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.1536 - loss: 3.6184 - val_accuracy: 0.0380 - val_loss: 4.8880 - learning_rate: 1.0000e-04
Epoch 4/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.1943 - loss: 3.3885 - val_accuracy: 0.0700 - val_loss: 4.1845 - learning_rate: 1.0000e-04
Epoch 5/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.2219 - loss: 3.1993 - val_accuracy: 0.0920 - val_loss: 3.9222 - learning_rate: 1.0000e-04
Epoch 6/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.2589 - loss: 3.0004 - val_accuracy: 0.1000 - val_loss: 4.0832 - learning_rate: 1.0000e-04
Epoch 7/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.2924 - loss: 2.8116 - val_accuracy: 0.1080 - val_loss: 4.0208 - learning_rate: 1.0000e-04
Epoch 8/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.3333 - loss: 2.6597 - val_accuracy: 0.1740 - val_loss: 3.4381 - learning_rate: 1.0000e-04
Epoch 9/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.3620 - loss: 2.4837 - val_accuracy: 0.1840 - val_loss: 3.5901 - learning_rate: 1.0000e-04
Epoch 10/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.3862 - loss: 2.3654 - val_accuracy: 0.2300 - val_loss: 3.3271 - learning_rate: 1.0000e-04
Epoch 11/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.4312 - loss: 2.1966 - val_accuracy: 0.3240 - val_loss: 2.7922 - learning_rate: 1.0000e-04
Epoch 12/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.4520 - loss: 2.1058 - val_accuracy: 0.3140 - val_loss: 2.7857 - learning_rate: 1.0000e-04
Epoch 13/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.4780 - loss: 1.9644 - val_accuracy: 0.2720 - val_loss: 3.1043 - learning_rate: 1.0000e-04
Epoch 14/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.5166 - loss: 1.8223 - val_accuracy: 0.4540 - val_loss: 2.1776 - learning_rate: 1.0000e-04
Epoch 15/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.5418 - loss: 1.7284 - val_accuracy: 0.4240 - val_loss: 2.1841 - learning_rate: 1.0000e-04
Epoch 16/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.5805 - loss: 1.5972 - val_accuracy: 0.4440 - val_loss: 2.2138 - learning_rate: 1.0000e-04
Epoch 17/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.5915 - loss: 1.5282 - val_accuracy: 0.4700 - val_loss: 2.0890 - learning_rate: 1.0000e-04
Epoch 18/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.6131 - loss: 1.4197 - val_accuracy: 0.4820 - val_loss: 2.0619 - learning_rate: 1.0000e-04
Epoch 19/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.6262 - loss: 1.3561 - val_accuracy: 0.4760 - val_loss: 2.0055 - learning_rate: 1.0000e-04
Epoch 20/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.6572 - loss: 1.2633 - val_accuracy: 0.5440 - val_loss: 1.7636 - learning_rate: 1.0000e-04
Epoch 21/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.6806 - loss: 1.1790 - val_accuracy: 0.4660 - val_loss: 1.9863 - learning_rate: 1.0000e-04
Epoch 22/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.6883 - loss: 1.1303 - val_accuracy: 0.5760 - val_loss: 1.6610 - learning_rate: 1.0000e-04
Epoch 23/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.7085 - loss: 1.0556 - val_accuracy: 0.5780 - val_loss: 1.5470 - learning_rate: 1.0000e-04
Epoch 24/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.7213 - loss: 0.9912 - val_accuracy: 0.5860 - val_loss: 1.5393 - learning_rate: 1.0000e-04
Epoch 25/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.7480 - loss: 0.9290 - val_accuracy: 0.5900 - val_loss: 1.5804 - learning_rate: 1.0000e-04
Epoch 26/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.7477 - loss: 0.8966 - val_accuracy: 0.6120 - val_loss: 1.4004 - learning_rate: 1.0000e-04
Epoch 27/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.7743 - loss: 0.8174 - val_accuracy: 0.6500 - val_loss: 1.3302 - learning_rate: 1.0000e-04
Epoch 28/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.7777 - loss: 0.7926 - val_accuracy: 0.6540 - val_loss: 1.3125 - learning_rate: 1.0000e-04
Epoch 29/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.7939 - loss: 0.7323 - val_accuracy: 0.7200 - val_loss: 1.0162 - learning_rate: 1.0000e-04
Epoch 30/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8072 - loss: 0.6974 - val_accuracy: 0.6860 - val_loss: 1.1897 - learning_rate: 1.0000e-04
Epoch 31/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8195 - loss: 0.6490 - val_accuracy: 0.7000 - val_loss: 1.0773 - learning_rate: 1.0000e-04
Epoch 32/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8293 - loss: 0.6115 - val_accuracy: 0.6840 - val_loss: 1.1667 - learning_rate: 1.0000e-04
Epoch 33/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8507 - loss: 0.5427 - val_accuracy: 0.7240 - val_loss: 1.0380 - learning_rate: 1.0000e-05
Epoch 34/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8766 - loss: 0.4741 - val_accuracy: 0.7240 - val_loss: 1.0192 - learning_rate: 1.0000e-05
Epoch 35/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8790 - loss: 0.4571 - val_accuracy: 0.7260 - val_loss: 0.9929 - learning_rate: 1.0000e-05
Epoch 36/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8840 - loss: 0.4313 - val_accuracy: 0.7260 - val_loss: 1.0103 - learning_rate: 1.0000e-05
Epoch 37/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8901 - loss: 0.4221 - val_accuracy: 0.7220 - val_loss: 0.9830 - learning_rate: 1.0000e-05
Epoch 38/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8849 - loss: 0.4195 - val_accuracy: 0.7120 - val_loss: 1.0228 - learning_rate: 1.0000e-05
Epoch 39/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8929 - loss: 0.4183 - val_accuracy: 0.7300 - val_loss: 0.9960 - learning_rate: 1.0000e-05
Epoch 40/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8961 - loss: 0.4025 - val_accuracy: 0.7260 - val_loss: 0.9823 - learning_rate: 1.0000e-05
Epoch 41/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8975 - loss: 0.3909 - val_accuracy: 0.7120 - val_loss: 1.0111 - learning_rate: 1.0000e-05
Epoch 42/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8967 - loss: 0.3945 - val_accuracy: 0.7260 - val_loss: 0.9689 - learning_rate: 1.0000e-05
Epoch 43/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9010 - loss: 0.3732 - val_accuracy: 0.7360 - val_loss: 0.9592 - learning_rate: 1.0000e-05
Epoch 44/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9070 - loss: 0.3738 - val_accuracy: 0.7240 - val_loss: 0.9601 - learning_rate: 1.0000e-05
Epoch 45/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.8966 - loss: 0.3859 - val_accuracy: 0.7220 - val_loss: 0.9745 - learning_rate: 1.0000e-05
Epoch 46/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9025 - loss: 0.3711 - val_accuracy: 0.7260 - val_loss: 0.9627 - learning_rate: 1.0000e-05
Epoch 47/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.8997 - loss: 0.3729 - val_accuracy: 0.7300 - val_loss: 0.9625 - learning_rate: 1.0000e-06
Epoch 48/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9046 - loss: 0.3635 - val_accuracy: 0.7360 - val_loss: 0.9527 - learning_rate: 1.0000e-06
Epoch 49/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9094 - loss: 0.3518 - val_accuracy: 0.7340 - val_loss: 0.9572 - learning_rate: 1.0000e-06
Epoch 50/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9064 - loss: 0.3591 - val_accuracy: 0.7300 - val_loss: 0.9560 - learning_rate: 1.0000e-06
Epoch 51/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9090 - loss: 0.3502 - val_accuracy: 0.7400 - val_loss: 0.9564 - learning_rate: 1.0000e-06
Epoch 52/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9077 - loss: 0.3544 - val_accuracy: 0.7360 - val_loss: 0.9532 - learning_rate: 1.0000e-07
Epoch 53/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9042 - loss: 0.3629 - val_accuracy: 0.7360 - val_loss: 0.9504 - learning_rate: 1.0000e-07
Epoch 54/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9123 - loss: 0.3479 - val_accuracy: 0.7360 - val_loss: 0.9548 - learning_rate: 1.0000e-07
Epoch 55/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9048 - loss: 0.3547 - val_accuracy: 0.7360 - val_loss: 0.9556 - learning_rate: 1.0000e-07
Epoch 56/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9052 - loss: 0.3618 - val_accuracy: 0.7400 - val_loss: 0.9526 - learning_rate: 1.0000e-07
Epoch 57/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9106 - loss: 0.3515 - val_accuracy: 0.7380 - val_loss: 0.9548 - learning_rate: 1.0000e-07
Epoch 58/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9111 - loss: 0.3553 - val_accuracy: 0.7360 - val_loss: 0.9529 - learning_rate: 1.0000e-07
Epoch 59/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9102 - loss: 0.3455 - val_accuracy: 0.7360 - val_loss: 0.9511 - learning_rate: 1.0000e-07
Epoch 60/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9155 - loss: 0.3420 - val_accuracy: 0.7360 - val_loss: 0.9541 - learning_rate: 1.0000e-07
Epoch 61/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9101 - loss: 0.3536 - val_accuracy: 0.7380 - val_loss: 0.9518 - learning_rate: 1.0000e-07
Epoch 62/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9047 - loss: 0.3600 - val_accuracy: 0.7400 - val_loss: 0.9498 - learning_rate: 1.0000e-07
Epoch 63/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9116 - loss: 0.3424 - val_accuracy: 0.7360 - val_loss: 0.9521 - learning_rate: 1.0000e-07
Epoch 64/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9084 - loss: 0.3585 - val_accuracy: 0.7300 - val_loss: 0.9487 - learning_rate: 1.0000e-07
Epoch 65/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9102 - loss: 0.3400 - val_accuracy: 0.7360 - val_loss: 0.9530 - learning_rate: 1.0000e-07
Epoch 66/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9107 - loss: 0.3450 - val_accuracy: 0.7400 - val_loss: 0.9499 - learning_rate: 1.0000e-07
Epoch 67/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9084 - loss: 0.3617 - val_accuracy: 0.7360 - val_loss: 0.9468 - learning_rate: 1.0000e-07
Epoch 68/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9101 - loss: 0.3503 - val_accuracy: 0.7400 - val_loss: 0.9472 - learning_rate: 1.0000e-07
Epoch 69/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9068 - loss: 0.3533 - val_accuracy: 0.7340 - val_loss: 0.9527 - learning_rate: 1.0000e-07
Epoch 70/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9122 - loss: 0.3464 - val_accuracy: 0.7400 - val_loss: 0.9502 - learning_rate: 1.0000e-07
Epoch 71/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9111 - loss: 0.3513 - val_accuracy: 0.7380 - val_loss: 0.9517 - learning_rate: 1.0000e-07
Epoch 72/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9148 - loss: 0.3440 - val_accuracy: 0.7360 - val_loss: 0.9474 - learning_rate: 1.0000e-07
Epoch 73/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9064 - loss: 0.3553 - val_accuracy: 0.7400 - val_loss: 0.9526 - learning_rate: 1.0000e-07
Epoch 74/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9067 - loss: 0.3598 - val_accuracy: 0.7420 - val_loss: 0.9513 - learning_rate: 1.0000e-07
Epoch 75/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9082 - loss: 0.3532 - val_accuracy: 0.7380 - val_loss: 0.9490 - learning_rate: 1.0000e-07
Epoch 76/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 394ms/step - accuracy: 0.9100 - loss: 0.3408 - val_accuracy: 0.7380 - val_loss: 0.9486 - learning_rate: 1.0000e-07
Epoch 77/100
211/211 ━━━━━━━━━━━━━━━━━━━━ 84s 395ms/step - accuracy: 0.9128 - loss: 0.3474 - val_accuracy: 0.7400 - val_loss: 0.9516 - learning_rate: 1.0000e-07
In [ ]:
import matplotlib.pyplot as plt

def plot_results(history, title):
    epochs = range(1, len(history.history['accuracy']) + 1)


    fig, axs = plt.subplots(2, 1, figsize=(10, 10))

    axs[0].plot(epochs, history.history['loss'], label='Training Loss')
    axs[0].plot(epochs, history.history['val_loss'], label='Validation Loss')
    axs[0].set_title('Loss vs. Epochs')
    axs[0].set_xlabel('Epochs')
    axs[0].set_ylabel('Loss')
    axs[0].legend()

    axs[1].plot(epochs, history.history['accuracy'], label='Training Accuracy')
    axs[1].plot(epochs, history.history['val_accuracy'], label='Validation Accuracy')
    axs[1].set_title('Accuracy vs. Epochs')
    axs[1].set_xlabel('Epochs')
    axs[1].set_ylabel('Accuracy')
    axs[1].legend()

    plt.suptitle(title)
    plt.tight_layout()
    plt.show()

# Plot loss and accuracy curves for the custom CNN
plot_results(history, 'Training and Validation Results')
[Image: training/validation loss and accuracy curves for the custom CNN]
In [ ]:
train_loss = history.history['loss']
train_accuracy = history.history['accuracy']
val_loss = history.history['val_loss']
val_accuracy = history.history['val_accuracy']

print(f"Final Training Loss: {train_loss[-1]}")
print(f"Final Training Accuracy: {train_accuracy[-1]}")
print(f"Final Validation Loss: {val_loss[-1]}")
print(f"Final Validation Accuracy: {val_accuracy[-1]}")
Final Training Loss: 0.34832096099853516
Final Training Accuracy: 0.9106878042221069
Final Validation Loss: 0.9515893459320068
Final Validation Accuracy: 0.7400000095367432
In [ ]:
# Evaluate the custom CNN on the test set in a single pass;
# evaluate() returns [loss, accuracy] for this model
test_results = model.evaluate(test_generator)
test_loss, test_acc = test_results[0], test_results[1]
print(f"Test Loss: {test_loss}")
print(f"Test Accuracy: {test_acc}")
1/8 ━━━━━━━━━━━━━━━━━━━━ 1s 230ms/step - accuracy: 0.7500 - loss: 0.7800
/usr/local/lib/python3.10/dist-packages/keras/src/trainers/data_adapters/py_dataset_adapter.py:121: UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`. Do not pass these arguments to `fit()`, as they will be ignored.
  self._warn_if_super_not_called()
8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 114ms/step - accuracy: 0.7596 - loss: 0.8040
8/8 ━━━━━━━━━━━━━━━━━━━━ 1s 113ms/step - accuracy: 0.7586 - loss: 0.8351
Test Loss: 0.8152586817741394
Test Accuracy: 0.7639999985694885
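With both models evaluated on the same test generator, the results reported above can be collected into a small comparison table (a minimal sketch; the figures are simply the validation and test numbers printed earlier in this notebook):

# Side-by-side comparison of the two models, using the results printed above
import pandas as pd

comparison = pd.DataFrame({
    'model': ['Xception (fine-tuned)', 'Custom CNN'],
    'val_accuracy': [0.952, 0.740],
    'test_accuracy': [0.976, 0.764],
    'test_loss': [0.078, 0.815],
})
print(comparison.to_string(index=False))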
In [ ]: